import tensorflow as tf
from tensorflow.keras.preprocessing.image import ImageDataGenerator
import random
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import time
# Report the GPUs TensorFlow can see (an empty list means CPU-only execution).
visible_gpus = tf.config.list_physical_devices('GPU')
print(visible_gpus)
[PhysicalDevice(name='/physical_device:GPU:0', device_type='GPU')]
# Dataset layout: one sub-directory per class under each split directory.
train_dir = 'img/train'
test_dir = 'img/test'
valid_dir = 'img/valid'

BATCH_SIZE = 128        # images per generator batch
IMG_SIZE = (224, 224)   # (height, width) fed to the network
IMG_SHAPE = IMG_SIZE + (3,)  # append the RGB channel axis -> (224, 224, 3)
# Training-time augmentation: scale pixels to [0, 1], then randomly flip,
# zoom, rotate and shift each image so every epoch sees fresh variations.
train_datagen = ImageDataGenerator(
    rescale=1. / 255,
    horizontal_flip=True,
    zoom_range=0.3,
    rotation_range=60,
    width_shift_range=0.2,
    height_shift_range=0.2,
)
# Stream shuffled (image, integer-label) batches from the class sub-folders.
# class_mode='sparse' yields integer labels, matching a
# sparse-categorical loss at training time.
train_generator = train_datagen.flow_from_directory(
    directory=train_dir,
    target_size=IMG_SIZE,
    batch_size=BATCH_SIZE,
    shuffle=True,
    class_mode='sparse',
)
# Dataset statistics taken from the generator after its directory scan.
num_training_classes = train_generator.num_classes
num_training_instances = len(train_generator.filenames)  # fixed typo: was "intances"
# Backward-compatible alias so any later cell using the old misspelled
# name keeps working.
num_training_intances = num_training_instances
# L29 was a bare expression (only displays inside a notebook); print
# explicitly so the script form also shows the counts.
print((num_training_instances, num_training_classes))
Found 145971 images belonging to 190 classes.
(145971, 190)
# Validation images are only rescaled — no augmentation — so validation
# metrics reflect the unmodified data.
valid_datagen = ImageDataGenerator(rescale=1. / 255)

val_generator = valid_datagen.flow_from_directory(
    directory=valid_dir,
    target_size=IMG_SIZE,
    batch_size=BATCH_SIZE,
    shuffle=True,
    class_mode='sparse',
)
Found 31279 images belonging to 190 classes.
# Test images are only rescaled — no augmentation — so evaluation sees the
# originals. batch_size=1 with shuffle=False keeps predictions aligned
# one-to-one with test_generator.filenames / .classes for per-image analysis.
test_datagen = ImageDataGenerator(rescale=1./255)
test_generator = test_datagen.flow_from_directory(directory=test_dir,
                                                  batch_size=1,
                                                  # was shuffle=True: shuffling the test split breaks
                                                  # the filename <-> prediction correspondence
                                                  shuffle=False,
                                                  target_size=IMG_SIZE,
                                                  class_mode='sparse')
Found 31280 images belonging to 190 classes.
# Preview a 5x5 grid of augmented training images. Each access of
# train_generator[0] re-runs the random augmentation pipeline, so the grid
# shows 25 different augmentations of whichever image sits at the chosen
# batch position.
# Fix: random.randint is inclusive on both ends, so (0, BATCH_SIZE) could
# return BATCH_SIZE itself — an out-of-range index for a BATCH_SIZE-image
# batch. Use BATCH_SIZE - 1 as the upper bound.
random_img = random.randint(0, BATCH_SIZE - 1)
columns = 5
rows = 5
fig = plt.figure(figsize=(40, 40))
for i in range(1, columns * rows + 1):
    fig.add_subplot(rows, columns, i)
    augmented_image = train_generator[0][0][random_img]
    plt.imshow(augmented_image)
plt.show()
# Load ResNet50V2 pre-trained on ImageNet, without its classification head,
# sized for our (224, 224, 3) inputs.
base_model = tf.keras.applications.ResNet50V2(
    input_shape=IMG_SHAPE,
    include_top=False,    # drop the ImageNet classifier head
    weights='imagenet',   # start from ImageNet pre-trained weights
)

# Partial freeze: every layer except the last 60 is frozen, so only the top
# of the backbone gets fine-tuned on our dataset.
for layer in base_model.layers[:-60]:
    layer.trainable = False

# Show the resulting architecture.
base_model.summary()
Model: "resnet50v2"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_1 (InputLayer) [(None, 224, 224, 3) 0
__________________________________________________________________________________________________
conv1_pad (ZeroPadding2D) (None, 230, 230, 3) 0 input_1[0][0]
__________________________________________________________________________________________________
conv1_conv (Conv2D) (None, 112, 112, 64) 9472 conv1_pad[0][0]
__________________________________________________________________________________________________
pool1_pad (ZeroPadding2D) (None, 114, 114, 64) 0 conv1_conv[0][0]
__________________________________________________________________________________________________
pool1_pool (MaxPooling2D) (None, 56, 56, 64) 0 pool1_pad[0][0]
__________________________________________________________________________________________________
conv2_block1_preact_bn (BatchNo (None, 56, 56, 64) 256 pool1_pool[0][0]
__________________________________________________________________________________________________
conv2_block1_preact_relu (Activ (None, 56, 56, 64) 0 conv2_block1_preact_bn[0][0]
__________________________________________________________________________________________________
conv2_block1_1_conv (Conv2D) (None, 56, 56, 64) 4096 conv2_block1_preact_relu[0][0]
__________________________________________________________________________________________________
conv2_block1_1_bn (BatchNormali (None, 56, 56, 64) 256 conv2_block1_1_conv[0][0]
__________________________________________________________________________________________________
conv2_block1_1_relu (Activation (None, 56, 56, 64) 0 conv2_block1_1_bn[0][0]
__________________________________________________________________________________________________
conv2_block1_2_pad (ZeroPadding (None, 58, 58, 64) 0 conv2_block1_1_relu[0][0]
__________________________________________________________________________________________________
conv2_block1_2_conv (Conv2D) (None, 56, 56, 64) 36864 conv2_block1_2_pad[0][0]
__________________________________________________________________________________________________
conv2_block1_2_bn (BatchNormali (None, 56, 56, 64) 256 conv2_block1_2_conv[0][0]
__________________________________________________________________________________________________
conv2_block1_2_relu (Activation (None, 56, 56, 64) 0 conv2_block1_2_bn[0][0]
__________________________________________________________________________________________________
conv2_block1_0_conv (Conv2D) (None, 56, 56, 256) 16640 conv2_block1_preact_relu[0][0]
__________________________________________________________________________________________________
conv2_block1_3_conv (Conv2D) (None, 56, 56, 256) 16640 conv2_block1_2_relu[0][0]
__________________________________________________________________________________________________
conv2_block1_out (Add) (None, 56, 56, 256) 0 conv2_block1_0_conv[0][0]
conv2_block1_3_conv[0][0]
__________________________________________________________________________________________________
conv2_block2_preact_bn (BatchNo (None, 56, 56, 256) 1024 conv2_block1_out[0][0]
__________________________________________________________________________________________________
conv2_block2_preact_relu (Activ (None, 56, 56, 256) 0 conv2_block2_preact_bn[0][0]
__________________________________________________________________________________________________
conv2_block2_1_conv (Conv2D) (None, 56, 56, 64) 16384 conv2_block2_preact_relu[0][0]
__________________________________________________________________________________________________
conv2_block2_1_bn (BatchNormali (None, 56, 56, 64) 256 conv2_block2_1_conv[0][0]
__________________________________________________________________________________________________
conv2_block2_1_relu (Activation (None, 56, 56, 64) 0 conv2_block2_1_bn[0][0]
__________________________________________________________________________________________________
conv2_block2_2_pad (ZeroPadding (None, 58, 58, 64) 0 conv2_block2_1_relu[0][0]
__________________________________________________________________________________________________
conv2_block2_2_conv (Conv2D) (None, 56, 56, 64) 36864 conv2_block2_2_pad[0][0]
__________________________________________________________________________________________________
conv2_block2_2_bn (BatchNormali (None, 56, 56, 64) 256 conv2_block2_2_conv[0][0]
__________________________________________________________________________________________________
conv2_block2_2_relu (Activation (None, 56, 56, 64) 0 conv2_block2_2_bn[0][0]
__________________________________________________________________________________________________
conv2_block2_3_conv (Conv2D) (None, 56, 56, 256) 16640 conv2_block2_2_relu[0][0]
__________________________________________________________________________________________________
conv2_block2_out (Add) (None, 56, 56, 256) 0 conv2_block1_out[0][0]
conv2_block2_3_conv[0][0]
__________________________________________________________________________________________________
conv2_block3_preact_bn (BatchNo (None, 56, 56, 256) 1024 conv2_block2_out[0][0]
__________________________________________________________________________________________________
conv2_block3_preact_relu (Activ (None, 56, 56, 256) 0 conv2_block3_preact_bn[0][0]
__________________________________________________________________________________________________
conv2_block3_1_conv (Conv2D) (None, 56, 56, 64) 16384 conv2_block3_preact_relu[0][0]
__________________________________________________________________________________________________
conv2_block3_1_bn (BatchNormali (None, 56, 56, 64) 256 conv2_block3_1_conv[0][0]
__________________________________________________________________________________________________
conv2_block3_1_relu (Activation (None, 56, 56, 64) 0 conv2_block3_1_bn[0][0]
__________________________________________________________________________________________________
conv2_block3_2_pad (ZeroPadding (None, 58, 58, 64) 0 conv2_block3_1_relu[0][0]
__________________________________________________________________________________________________
conv2_block3_2_conv (Conv2D) (None, 28, 28, 64) 36864 conv2_block3_2_pad[0][0]
__________________________________________________________________________________________________
conv2_block3_2_bn (BatchNormali (None, 28, 28, 64) 256 conv2_block3_2_conv[0][0]
__________________________________________________________________________________________________
conv2_block3_2_relu (Activation (None, 28, 28, 64) 0 conv2_block3_2_bn[0][0]
__________________________________________________________________________________________________
max_pooling2d (MaxPooling2D) (None, 28, 28, 256) 0 conv2_block2_out[0][0]
__________________________________________________________________________________________________
conv2_block3_3_conv (Conv2D) (None, 28, 28, 256) 16640 conv2_block3_2_relu[0][0]
__________________________________________________________________________________________________
conv2_block3_out (Add) (None, 28, 28, 256) 0 max_pooling2d[0][0]
conv2_block3_3_conv[0][0]
__________________________________________________________________________________________________
conv3_block1_preact_bn (BatchNo (None, 28, 28, 256) 1024 conv2_block3_out[0][0]
__________________________________________________________________________________________________
conv3_block1_preact_relu (Activ (None, 28, 28, 256) 0 conv3_block1_preact_bn[0][0]
__________________________________________________________________________________________________
conv3_block1_1_conv (Conv2D) (None, 28, 28, 128) 32768 conv3_block1_preact_relu[0][0]
__________________________________________________________________________________________________
conv3_block1_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block1_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block1_1_relu (Activation (None, 28, 28, 128) 0 conv3_block1_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block1_2_pad (ZeroPadding (None, 30, 30, 128) 0 conv3_block1_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block1_2_conv (Conv2D) (None, 28, 28, 128) 147456 conv3_block1_2_pad[0][0]
__________________________________________________________________________________________________
conv3_block1_2_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block1_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block1_2_relu (Activation (None, 28, 28, 128) 0 conv3_block1_2_bn[0][0]
__________________________________________________________________________________________________
conv3_block1_0_conv (Conv2D) (None, 28, 28, 512) 131584 conv3_block1_preact_relu[0][0]
__________________________________________________________________________________________________
conv3_block1_3_conv (Conv2D) (None, 28, 28, 512) 66048 conv3_block1_2_relu[0][0]
__________________________________________________________________________________________________
conv3_block1_out (Add) (None, 28, 28, 512) 0 conv3_block1_0_conv[0][0]
conv3_block1_3_conv[0][0]
__________________________________________________________________________________________________
conv3_block2_preact_bn (BatchNo (None, 28, 28, 512) 2048 conv3_block1_out[0][0]
__________________________________________________________________________________________________
conv3_block2_preact_relu (Activ (None, 28, 28, 512) 0 conv3_block2_preact_bn[0][0]
__________________________________________________________________________________________________
conv3_block2_1_conv (Conv2D) (None, 28, 28, 128) 65536 conv3_block2_preact_relu[0][0]
__________________________________________________________________________________________________
conv3_block2_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block2_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block2_1_relu (Activation (None, 28, 28, 128) 0 conv3_block2_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block2_2_pad (ZeroPadding (None, 30, 30, 128) 0 conv3_block2_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block2_2_conv (Conv2D) (None, 28, 28, 128) 147456 conv3_block2_2_pad[0][0]
__________________________________________________________________________________________________
conv3_block2_2_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block2_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block2_2_relu (Activation (None, 28, 28, 128) 0 conv3_block2_2_bn[0][0]
__________________________________________________________________________________________________
conv3_block2_3_conv (Conv2D) (None, 28, 28, 512) 66048 conv3_block2_2_relu[0][0]
__________________________________________________________________________________________________
conv3_block2_out (Add) (None, 28, 28, 512) 0 conv3_block1_out[0][0]
conv3_block2_3_conv[0][0]
__________________________________________________________________________________________________
conv3_block3_preact_bn (BatchNo (None, 28, 28, 512) 2048 conv3_block2_out[0][0]
__________________________________________________________________________________________________
conv3_block3_preact_relu (Activ (None, 28, 28, 512) 0 conv3_block3_preact_bn[0][0]
__________________________________________________________________________________________________
conv3_block3_1_conv (Conv2D) (None, 28, 28, 128) 65536 conv3_block3_preact_relu[0][0]
__________________________________________________________________________________________________
conv3_block3_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block3_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block3_1_relu (Activation (None, 28, 28, 128) 0 conv3_block3_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block3_2_pad (ZeroPadding (None, 30, 30, 128) 0 conv3_block3_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block3_2_conv (Conv2D) (None, 28, 28, 128) 147456 conv3_block3_2_pad[0][0]
__________________________________________________________________________________________________
conv3_block3_2_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block3_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block3_2_relu (Activation (None, 28, 28, 128) 0 conv3_block3_2_bn[0][0]
__________________________________________________________________________________________________
conv3_block3_3_conv (Conv2D) (None, 28, 28, 512) 66048 conv3_block3_2_relu[0][0]
__________________________________________________________________________________________________
conv3_block3_out (Add) (None, 28, 28, 512) 0 conv3_block2_out[0][0]
conv3_block3_3_conv[0][0]
__________________________________________________________________________________________________
conv3_block4_preact_bn (BatchNo (None, 28, 28, 512) 2048 conv3_block3_out[0][0]
__________________________________________________________________________________________________
conv3_block4_preact_relu (Activ (None, 28, 28, 512) 0 conv3_block4_preact_bn[0][0]
__________________________________________________________________________________________________
conv3_block4_1_conv (Conv2D) (None, 28, 28, 128) 65536 conv3_block4_preact_relu[0][0]
__________________________________________________________________________________________________
conv3_block4_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block4_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block4_1_relu (Activation (None, 28, 28, 128) 0 conv3_block4_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block4_2_pad (ZeroPadding (None, 30, 30, 128) 0 conv3_block4_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block4_2_conv (Conv2D) (None, 14, 14, 128) 147456 conv3_block4_2_pad[0][0]
__________________________________________________________________________________________________
conv3_block4_2_bn (BatchNormali (None, 14, 14, 128) 512 conv3_block4_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block4_2_relu (Activation (None, 14, 14, 128) 0 conv3_block4_2_bn[0][0]
__________________________________________________________________________________________________
max_pooling2d_1 (MaxPooling2D) (None, 14, 14, 512) 0 conv3_block3_out[0][0]
__________________________________________________________________________________________________
conv3_block4_3_conv (Conv2D) (None, 14, 14, 512) 66048 conv3_block4_2_relu[0][0]
__________________________________________________________________________________________________
conv3_block4_out (Add) (None, 14, 14, 512) 0 max_pooling2d_1[0][0]
conv3_block4_3_conv[0][0]
__________________________________________________________________________________________________
conv4_block1_preact_bn (BatchNo (None, 14, 14, 512) 2048 conv3_block4_out[0][0]
__________________________________________________________________________________________________
conv4_block1_preact_relu (Activ (None, 14, 14, 512) 0 conv4_block1_preact_bn[0][0]
__________________________________________________________________________________________________
conv4_block1_1_conv (Conv2D) (None, 14, 14, 256) 131072 conv4_block1_preact_relu[0][0]
__________________________________________________________________________________________________
conv4_block1_1_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block1_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block1_1_relu (Activation (None, 14, 14, 256) 0 conv4_block1_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block1_2_pad (ZeroPadding (None, 16, 16, 256) 0 conv4_block1_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block1_2_conv (Conv2D) (None, 14, 14, 256) 589824 conv4_block1_2_pad[0][0]
__________________________________________________________________________________________________
conv4_block1_2_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block1_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block1_2_relu (Activation (None, 14, 14, 256) 0 conv4_block1_2_bn[0][0]
__________________________________________________________________________________________________
conv4_block1_0_conv (Conv2D) (None, 14, 14, 1024) 525312 conv4_block1_preact_relu[0][0]
__________________________________________________________________________________________________
conv4_block1_3_conv (Conv2D) (None, 14, 14, 1024) 263168 conv4_block1_2_relu[0][0]
__________________________________________________________________________________________________
conv4_block1_out (Add) (None, 14, 14, 1024) 0 conv4_block1_0_conv[0][0]
conv4_block1_3_conv[0][0]
__________________________________________________________________________________________________
conv4_block2_preact_bn (BatchNo (None, 14, 14, 1024) 4096 conv4_block1_out[0][0]
__________________________________________________________________________________________________
conv4_block2_preact_relu (Activ (None, 14, 14, 1024) 0 conv4_block2_preact_bn[0][0]
__________________________________________________________________________________________________
conv4_block2_1_conv (Conv2D) (None, 14, 14, 256) 262144 conv4_block2_preact_relu[0][0]
__________________________________________________________________________________________________
conv4_block2_1_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block2_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block2_1_relu (Activation (None, 14, 14, 256) 0 conv4_block2_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block2_2_pad (ZeroPadding (None, 16, 16, 256) 0 conv4_block2_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block2_2_conv (Conv2D) (None, 14, 14, 256) 589824 conv4_block2_2_pad[0][0]
__________________________________________________________________________________________________
conv4_block2_2_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block2_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block2_2_relu (Activation (None, 14, 14, 256) 0 conv4_block2_2_bn[0][0]
__________________________________________________________________________________________________
conv4_block2_3_conv (Conv2D) (None, 14, 14, 1024) 263168 conv4_block2_2_relu[0][0]
__________________________________________________________________________________________________
conv4_block2_out (Add) (None, 14, 14, 1024) 0 conv4_block1_out[0][0]
conv4_block2_3_conv[0][0]
__________________________________________________________________________________________________
conv4_block3_preact_bn (BatchNo (None, 14, 14, 1024) 4096 conv4_block2_out[0][0]
__________________________________________________________________________________________________
conv4_block3_preact_relu (Activ (None, 14, 14, 1024) 0 conv4_block3_preact_bn[0][0]
__________________________________________________________________________________________________
conv4_block3_1_conv (Conv2D) (None, 14, 14, 256) 262144 conv4_block3_preact_relu[0][0]
__________________________________________________________________________________________________
conv4_block3_1_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block3_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block3_1_relu (Activation (None, 14, 14, 256) 0 conv4_block3_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block3_2_pad (ZeroPadding (None, 16, 16, 256) 0 conv4_block3_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block3_2_conv (Conv2D) (None, 14, 14, 256) 589824 conv4_block3_2_pad[0][0]
__________________________________________________________________________________________________
conv4_block3_2_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block3_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block3_2_relu (Activation (None, 14, 14, 256) 0 conv4_block3_2_bn[0][0]
__________________________________________________________________________________________________
conv4_block3_3_conv (Conv2D) (None, 14, 14, 1024) 263168 conv4_block3_2_relu[0][0]
__________________________________________________________________________________________________
conv4_block3_out (Add) (None, 14, 14, 1024) 0 conv4_block2_out[0][0]
conv4_block3_3_conv[0][0]
__________________________________________________________________________________________________
conv4_block4_preact_bn (BatchNo (None, 14, 14, 1024) 4096 conv4_block3_out[0][0]
__________________________________________________________________________________________________
conv4_block4_preact_relu (Activ (None, 14, 14, 1024) 0 conv4_block4_preact_bn[0][0]
__________________________________________________________________________________________________
conv4_block4_1_conv (Conv2D) (None, 14, 14, 256) 262144 conv4_block4_preact_relu[0][0]
__________________________________________________________________________________________________
conv4_block4_1_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block4_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block4_1_relu (Activation (None, 14, 14, 256) 0 conv4_block4_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block4_2_pad (ZeroPadding (None, 16, 16, 256) 0 conv4_block4_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block4_2_conv (Conv2D) (None, 14, 14, 256) 589824 conv4_block4_2_pad[0][0]
__________________________________________________________________________________________________
conv4_block4_2_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block4_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block4_2_relu (Activation (None, 14, 14, 256) 0 conv4_block4_2_bn[0][0]
__________________________________________________________________________________________________
conv4_block4_3_conv (Conv2D) (None, 14, 14, 1024) 263168 conv4_block4_2_relu[0][0]
__________________________________________________________________________________________________
conv4_block4_out (Add) (None, 14, 14, 1024) 0 conv4_block3_out[0][0]
conv4_block4_3_conv[0][0]
__________________________________________________________________________________________________
conv4_block5_preact_bn (BatchNo (None, 14, 14, 1024) 4096 conv4_block4_out[0][0]
__________________________________________________________________________________________________
conv4_block5_preact_relu (Activ (None, 14, 14, 1024) 0 conv4_block5_preact_bn[0][0]
__________________________________________________________________________________________________
conv4_block5_1_conv (Conv2D) (None, 14, 14, 256) 262144 conv4_block5_preact_relu[0][0]
__________________________________________________________________________________________________
conv4_block5_1_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block5_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block5_1_relu (Activation (None, 14, 14, 256) 0 conv4_block5_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block5_2_pad (ZeroPadding (None, 16, 16, 256) 0 conv4_block5_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block5_2_conv (Conv2D) (None, 14, 14, 256) 589824 conv4_block5_2_pad[0][0]
__________________________________________________________________________________________________
conv4_block5_2_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block5_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block5_2_relu (Activation (None, 14, 14, 256) 0 conv4_block5_2_bn[0][0]
__________________________________________________________________________________________________
conv4_block5_3_conv (Conv2D) (None, 14, 14, 1024) 263168 conv4_block5_2_relu[0][0]
__________________________________________________________________________________________________
conv4_block5_out (Add) (None, 14, 14, 1024) 0 conv4_block4_out[0][0]
conv4_block5_3_conv[0][0]
__________________________________________________________________________________________________
conv4_block6_preact_bn (BatchNo (None, 14, 14, 1024) 4096 conv4_block5_out[0][0]
__________________________________________________________________________________________________
conv4_block6_preact_relu (Activ (None, 14, 14, 1024) 0 conv4_block6_preact_bn[0][0]
__________________________________________________________________________________________________
conv4_block6_1_conv (Conv2D) (None, 14, 14, 256) 262144 conv4_block6_preact_relu[0][0]
__________________________________________________________________________________________________
conv4_block6_1_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block6_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block6_1_relu (Activation (None, 14, 14, 256) 0 conv4_block6_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block6_2_pad (ZeroPadding (None, 16, 16, 256) 0 conv4_block6_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block6_2_conv (Conv2D) (None, 7, 7, 256) 589824 conv4_block6_2_pad[0][0]
__________________________________________________________________________________________________
conv4_block6_2_bn (BatchNormali (None, 7, 7, 256) 1024 conv4_block6_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block6_2_relu (Activation (None, 7, 7, 256) 0 conv4_block6_2_bn[0][0]
__________________________________________________________________________________________________
max_pooling2d_2 (MaxPooling2D) (None, 7, 7, 1024) 0 conv4_block5_out[0][0]
__________________________________________________________________________________________________
conv4_block6_3_conv (Conv2D) (None, 7, 7, 1024) 263168 conv4_block6_2_relu[0][0]
__________________________________________________________________________________________________
conv4_block6_out (Add) (None, 7, 7, 1024) 0 max_pooling2d_2[0][0]
conv4_block6_3_conv[0][0]
__________________________________________________________________________________________________
conv5_block1_preact_bn (BatchNo (None, 7, 7, 1024) 4096 conv4_block6_out[0][0]
__________________________________________________________________________________________________
conv5_block1_preact_relu (Activ (None, 7, 7, 1024) 0 conv5_block1_preact_bn[0][0]
__________________________________________________________________________________________________
conv5_block1_1_conv (Conv2D) (None, 7, 7, 512) 524288 conv5_block1_preact_relu[0][0]
__________________________________________________________________________________________________
conv5_block1_1_bn (BatchNormali (None, 7, 7, 512) 2048 conv5_block1_1_conv[0][0]
__________________________________________________________________________________________________
conv5_block1_1_relu (Activation (None, 7, 7, 512) 0 conv5_block1_1_bn[0][0]
__________________________________________________________________________________________________
conv5_block1_2_pad (ZeroPadding (None, 9, 9, 512) 0 conv5_block1_1_relu[0][0]
__________________________________________________________________________________________________
conv5_block1_2_conv (Conv2D) (None, 7, 7, 512) 2359296 conv5_block1_2_pad[0][0]
__________________________________________________________________________________________________
conv5_block1_2_bn (BatchNormali (None, 7, 7, 512) 2048 conv5_block1_2_conv[0][0]
__________________________________________________________________________________________________
conv5_block1_2_relu (Activation (None, 7, 7, 512) 0 conv5_block1_2_bn[0][0]
__________________________________________________________________________________________________
conv5_block1_0_conv (Conv2D) (None, 7, 7, 2048) 2099200 conv5_block1_preact_relu[0][0]
__________________________________________________________________________________________________
conv5_block1_3_conv (Conv2D) (None, 7, 7, 2048) 1050624 conv5_block1_2_relu[0][0]
__________________________________________________________________________________________________
conv5_block1_out (Add) (None, 7, 7, 2048) 0 conv5_block1_0_conv[0][0]
conv5_block1_3_conv[0][0]
__________________________________________________________________________________________________
conv5_block2_preact_bn (BatchNo (None, 7, 7, 2048) 8192 conv5_block1_out[0][0]
__________________________________________________________________________________________________
conv5_block2_preact_relu (Activ (None, 7, 7, 2048) 0 conv5_block2_preact_bn[0][0]
__________________________________________________________________________________________________
conv5_block2_1_conv (Conv2D) (None, 7, 7, 512) 1048576 conv5_block2_preact_relu[0][0]
__________________________________________________________________________________________________
conv5_block2_1_bn (BatchNormali (None, 7, 7, 512) 2048 conv5_block2_1_conv[0][0]
__________________________________________________________________________________________________
conv5_block2_1_relu (Activation (None, 7, 7, 512) 0 conv5_block2_1_bn[0][0]
__________________________________________________________________________________________________
conv5_block2_2_pad (ZeroPadding (None, 9, 9, 512) 0 conv5_block2_1_relu[0][0]
__________________________________________________________________________________________________
conv5_block2_2_conv (Conv2D) (None, 7, 7, 512) 2359296 conv5_block2_2_pad[0][0]
__________________________________________________________________________________________________
conv5_block2_2_bn (BatchNormali (None, 7, 7, 512) 2048 conv5_block2_2_conv[0][0]
__________________________________________________________________________________________________
conv5_block2_2_relu (Activation (None, 7, 7, 512) 0 conv5_block2_2_bn[0][0]
__________________________________________________________________________________________________
conv5_block2_3_conv (Conv2D) (None, 7, 7, 2048) 1050624 conv5_block2_2_relu[0][0]
__________________________________________________________________________________________________
conv5_block2_out (Add) (None, 7, 7, 2048) 0 conv5_block1_out[0][0]
conv5_block2_3_conv[0][0]
__________________________________________________________________________________________________
conv5_block3_preact_bn (BatchNo (None, 7, 7, 2048) 8192 conv5_block2_out[0][0]
__________________________________________________________________________________________________
conv5_block3_preact_relu (Activ (None, 7, 7, 2048) 0 conv5_block3_preact_bn[0][0]
__________________________________________________________________________________________________
conv5_block3_1_conv (Conv2D) (None, 7, 7, 512) 1048576 conv5_block3_preact_relu[0][0]
__________________________________________________________________________________________________
conv5_block3_1_bn (BatchNormali (None, 7, 7, 512) 2048 conv5_block3_1_conv[0][0]
__________________________________________________________________________________________________
conv5_block3_1_relu (Activation (None, 7, 7, 512) 0 conv5_block3_1_bn[0][0]
__________________________________________________________________________________________________
conv5_block3_2_pad (ZeroPadding (None, 9, 9, 512) 0 conv5_block3_1_relu[0][0]
__________________________________________________________________________________________________
conv5_block3_2_conv (Conv2D) (None, 7, 7, 512) 2359296 conv5_block3_2_pad[0][0]
__________________________________________________________________________________________________
conv5_block3_2_bn (BatchNormali (None, 7, 7, 512) 2048 conv5_block3_2_conv[0][0]
__________________________________________________________________________________________________
conv5_block3_2_relu (Activation (None, 7, 7, 512) 0 conv5_block3_2_bn[0][0]
__________________________________________________________________________________________________
conv5_block3_3_conv (Conv2D) (None, 7, 7, 2048) 1050624 conv5_block3_2_relu[0][0]
__________________________________________________________________________________________________
conv5_block3_out (Add) (None, 7, 7, 2048) 0 conv5_block2_out[0][0]
conv5_block3_3_conv[0][0]
__________________________________________________________________________________________________
post_bn (BatchNormalization) (None, 7, 7, 2048) 8192 conv5_block3_out[0][0]
__________________________________________________________________________________________________
post_relu (Activation) (None, 7, 7, 2048) 0 post_bn[0][0]
==================================================================================================
Total params: 23,564,800
Trainable params: 17,207,296
Non-trainable params: 6,357,504
__________________________________________________________________________________________________
# # Unfreeze the last x layers
# for layer in base_model.layers[-10:]:
#     layer.trainable = True

# Build the classification head on top of the (partially frozen) backbone.
inputs = tf.keras.Input(shape=IMG_SHAPE)
# training=False keeps the backbone's BatchNorm layers in inference mode
# even while the outer model is training.
features = base_model(inputs, training=False)
pooled = tf.keras.layers.GlobalAvgPool2D()(features)
hidden = tf.keras.layers.Dense(num_training_classes, activation='relu')(pooled)
dropped = tf.keras.layers.Dropout(.1)(hidden)
# x = tf.keras.layers.Dense(512, activation='relu')(x)
# x = tf.keras.layers.Dropout(.1)(x)
outputs = tf.keras.layers.Dense(
    num_training_classes,
    activation=tf.keras.activations.softmax,
    kernel_regularizer=tf.keras.regularizers.l2(0.001),
)(dropped)
model = tf.keras.models.Model(inputs, outputs)
model.summary()
Model: "model" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= input_2 (InputLayer) [(None, 224, 224, 3)] 0 _________________________________________________________________ resnet50v2 (Functional) (None, 7, 7, 2048) 23564800 _________________________________________________________________ global_average_pooling2d (Gl (None, 2048) 0 _________________________________________________________________ dense (Dense) (None, 190) 389310 _________________________________________________________________ dropout (Dropout) (None, 190) 0 _________________________________________________________________ dense_1 (Dense) (None, 190) 36290 ================================================================= Total params: 23,990,400 Trainable params: 17,632,896 Non-trainable params: 6,357,504 _________________________________________________________________
# Stop training once val_loss has not improved for 5 consecutive epochs,
# rolling the model back to the best weights seen so far.
early_stopping = tf.keras.callbacks.EarlyStopping(
    monitor='val_loss',
    patience=5,
    restore_best_weights=True,
)
# Halve the learning rate whenever the monitored metric (default: val_loss)
# plateaus for 2 epochs.
lr_scheduler = tf.keras.callbacks.ReduceLROnPlateau(factor=0.5, patience=2)
class SGDLRChecker(tf.keras.callbacks.Callback):
    """Print SGD's effective (time-decayed) learning rate after each epoch."""

    def on_epoch_end(self, epoch, logs=None):
        opt = self.model.optimizer
        # Time-based decay: lr_t = lr / (1 + decay * iterations)
        steps = tf.keras.backend.cast(opt.iterations,
                                      tf.keras.backend.dtype(opt.decay))
        effective_lr = opt.lr / (1. + opt.decay * steps)
        print("Learning Rate = ", tf.keras.backend.eval(effective_lr))
lr_checker = SGDLRChecker()
# Timestamped filename so successive runs don't overwrite earlier checkpoints.
model_name = time.strftime("%d_%m_%Y-%H_%M_%S")
model_checkpoint = tf.keras.callbacks.ModelCheckpoint(
    'checkpoints/' + model_name + '.h5',
    monitor='val_loss',
    save_best_only=True,
    verbose=1,
)
# Compute the class weights
from sklearn.utils import class_weight
# 'balanced' gives inverse-frequency weights so under-represented classes
# contribute proportionally more to the loss.
balanced_weights = class_weight.compute_class_weight(
    'balanced',
    classes=np.unique(train_generator.classes),
    y=train_generator.classes,
)
class_weights = dict(enumerate(balanced_weights))
class_weights
{0: 0.7539434946542017,
1: 1.2677696717040126,
2: 2.6583682389364416,
3: 1.0438429633867277,
4: 0.74014298752662,
5: 3.036634075306844,
6: 0.74014298752662,
7: 2.5189128559102674,
8: 2.7536502546689303,
9: 3.7660216718266253,
10: 3.161598440545809,
11: 3.747650834403081,
12: 0.7261516267038106,
13: 0.7140041087849736,
14: 0.74014298752662,
15: 1.4831436699857752,
16: 1.0760061919504644,
17: 2.2865131578947366,
18: 3.1746628969117006,
19: 1.141557832173301,
20: 0.7458916709248851,
21: 1.0042724458204335,
22: 1.0003495065789474,
23: 0.7234165923282784,
24: 1.2156145902731512,
25: 0.7365948428117273,
26: 1.4888922888616891,
27: 0.7323817169233857,
28: 0.7344822381000302,
29: 0.9774407392527119,
30: 0.7247815292949354,
31: 0.7302931759055433,
32: 0.7358892921960073,
33: 0.7275269138755981,
34: 1.3431266102318733,
35: 0.7509955240006174,
36: 0.7365948428117273,
37: 0.7193524541691307,
38: 0.7358892921960073,
39: 2.3711988304093565,
40: 1.0928427042000448,
41: 2.9548785425101216,
42: 2.3140615091946732,
43: 0.7268386197281282,
44: 0.7282165128460962,
45: 0.711359649122807,
46: 0.7380100106173214,
47: 0.7295996401259559,
48: 0.7502621299342105,
49: 0.7234165923282784,
50: 0.7652075906898721,
51: 2.5780819498410454,
52: 2.3933595671421544,
53: 0.9167880919482477,
54: 2.9323222177581356,
55: 0.74807051709117,
56: 1.6451143919756566,
57: 0.7282165128460962,
58: 0.7344822381000302,
59: 0.7282165128460962,
60: 0.720026636412963,
61: 2.7937033492822967,
62: 0.74807051709117,
63: 1.6922211917458845,
64: 0.7289074203535404,
65: 0.716000392406926,
66: 1.3088048058818256,
67: 2.0059227703724063,
68: 0.7140041087849736,
69: 1.2292294736842104,
70: 0.7415718349928876,
71: 0.7330805544395339,
72: 2.1164419312744673,
73: 0.7309880314487456,
74: 0.720702083539054,
75: 1.750041961395516,
76: 0.711359649122807,
77: 0.8954177401545823,
78: 0.7289074203535404,
79: 0.7539434946542017,
80: 0.720702083539054,
81: 0.7380100106173214,
82: 0.7344822381000302,
83: 1.7460645933014354,
84: 1.5489282682512733,
85: 0.7373017476512779,
86: 1.8292105263157894,
87: 0.7282165128460962,
88: 0.7487996306555863,
89: 1.0928427042000448,
90: 2.9777845777233782,
91: 0.7358892921960073,
92: 0.7234165923282784,
93: 0.9449796076908138,
94: 3.1878357720026207,
95: 0.7295996401259559,
96: 2.7536502546689303,
97: 2.1164419312744673,
98: 3.2831983805668017,
99: 0.7254659311167437,
100: 2.2662785281788542,
101: 0.7153337253748897,
102: 0.9799342105263158,
103: 1.2431527848748083,
104: 1.0582209656372337,
105: 1.533469902300662,
106: 0.7247815292949354,
107: 1.441404167078108,
108: 1.7381638485353657,
109: 2.1222884559464963,
110: 0.7422883295194508,
111: 3.214512221977538,
112: 3.2553746654772526,
113: 0.7093891237789766,
114: 2.0378472706966355,
115: 3.214512221977538,
116: 2.9323222177581356,
117: 1.7908354803091646,
118: 0.8368937048503612,
119: 1.1675811870100783,
120: 0.7744641341256366,
121: 3.6759254595819693,
122: 0.7173374613003096,
123: 1.7187212998940304,
124: 0.8323601528197525,
125: 0.720702083539054,
126: 3.3548839347276487,
127: 0.7316842105263158,
128: 0.7107015920930911,
129: 0.9189813648954923,
130: 0.7394306266146599,
131: 0.7394306266146599,
132: 1.5273726064664643,
133: 0.7444461444308446,
134: 0.7330805544395339,
135: 0.7430062099155045,
136: 0.7337807268888554,
137: 0.7444461444308446,
138: 1.6665258591163374,
139: 0.6990613476366074,
140: 0.7614156799332324,
141: 1.2698651587646803,
142: 2.695678670360111,
143: 0.7289074203535404,
144: 0.7408567223265493,
145: 0.7330805544395339,
146: 0.8651671408250355,
147: 0.7373017476512779,
148: 0.7561697057604642,
149: 0.8479783896828164,
150: 0.7358892921960073,
151: 0.7213787991104522,
152: 1.5583537952386035,
153: 2.9101076555023924,
154: 2.7937033492822967,
155: 2.0764011379800853,
156: 2.1164419312744673,
157: 3.4297697368421054,
158: 0.7022563263735206,
159: 0.7330805544395339,
160: 3.6410825642304814,
161: 0.7275269138755981,
162: 1.8119538232373387,
163: 0.840556259357365,
164: 1.3314877314603666,
165: 2.1222884559464963,
166: 2.134078947368421,
167: 0.7289074203535404,
168: 3.2692273236282197,
169: 3.729458354624425,
170: 0.7247815292949354,
171: 0.7344822381000302,
172: 0.7254659311167437,
173: 0.7153337253748897,
174: 0.74014298752662,
175: 0.7275269138755981,
176: 0.7415718349928876,
177: 3.1230423620025674,
178: 1.2117798439315957,
179: 0.7487996306555863,
180: 1.1747223563495897,
181: 0.7532043343653251,
182: 1.0270968195890797,
183: 0.7495301668806161,
184: 0.7289074203535404,
185: 0.7323817169233857,
186: 0.7247815292949354,
187: 0.7268386197281282,
188: 0.7380100106173214,
189: 0.7408567223265493}
# Define the optimizer, loss and metrics
LEARNING_RATE = 0.01
# `learning_rate=` is the documented argument name; `lr=` is a deprecated
# alias in tf.keras optimizers and emits a warning on recent TF releases.
# `decay` enables time-based decay (lr_t = lr / (1 + decay * iterations)),
# which is exactly what SGDLRChecker prints at the end of each epoch.
model.compile(optimizer=tf.keras.optimizers.SGD(learning_rate=LEARNING_RATE,
                                                momentum=0.9,
                                                decay=1e-4),
              # Labels are integer class indices (class_mode='sparse'),
              # so the sparse variant of categorical cross-entropy is used.
              loss=tf.keras.losses.SparseCategoricalCrossentropy(),
              metrics=['accuracy'])
EPOCHS = 100
history = model.fit(train_generator,
                    epochs=EPOCHS,
                    callbacks=[early_stopping, lr_scheduler, lr_checker, model_checkpoint],
                    validation_data=val_generator,
                    # Re-weight the loss per class to offset class imbalance.
                    class_weight=class_weights)
Epoch 1/100 1141/1141 [==============================] - 2159s 2s/step - loss: 5.3116 - accuracy: 0.0119 - val_loss: 3.7215 - val_accuracy: 0.1566 Learning Rate = 0.008975855 Epoch 00001: val_loss improved from inf to 3.72147, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 2/100 1141/1141 [==============================] - 1457s 1s/step - loss: 3.6215 - accuracy: 0.1794 - val_loss: 2.5512 - val_accuracy: 0.3794 Learning Rate = 0.008141996 Epoch 00002: val_loss improved from 3.72147 to 2.55118, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 3/100 1141/1141 [==============================] - 1456s 1s/step - loss: 2.5347 - accuracy: 0.3791 - val_loss: 1.8215 - val_accuracy: 0.5442 Learning Rate = 0.0074499 Epoch 00003: val_loss improved from 2.55118 to 1.82154, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 4/100 1141/1141 [==============================] - 1453s 1s/step - loss: 2.0268 - accuracy: 0.4927 - val_loss: 1.4227 - val_accuracy: 0.6376 Learning Rate = 0.006866246 Epoch 00004: val_loss improved from 1.82154 to 1.42269, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 5/100 1141/1141 [==============================] - 1454s 1s/step - loss: 1.6998 - accuracy: 0.5660 - val_loss: 1.2214 - val_accuracy: 0.6871 Learning Rate = 0.0063673994 Epoch 00005: val_loss improved from 1.42269 to 1.22144, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 6/100 1141/1141 [==============================] - 1457s 1s/step - loss: 1.4721 - accuracy: 0.6179 - val_loss: 1.0527 - val_accuracy: 0.7273 Learning Rate = 0.005936127 Epoch 00006: val_loss improved from 1.22144 to 1.05270, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 7/100 1141/1141 [==============================] - 1448s 1s/step - loss: 1.3217 - accuracy: 0.6574 - val_loss: 1.0012 - val_accuracy: 0.7421 Learning Rate = 0.0055595706 Epoch 00007: val_loss improved from 1.05270 to 1.00119, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 8/100 1141/1141 
[==============================] - 1460s 1s/step - loss: 1.2013 - accuracy: 0.6827 - val_loss: 0.9106 - val_accuracy: 0.7656 Learning Rate = 0.0052279383 Epoch 00008: val_loss improved from 1.00119 to 0.91064, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 9/100 1141/1141 [==============================] - 1456s 1s/step - loss: 1.0925 - accuracy: 0.7111 - val_loss: 0.8214 - val_accuracy: 0.7870 Learning Rate = 0.0049336427 Epoch 00009: val_loss improved from 0.91064 to 0.82135, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 10/100 1141/1141 [==============================] - 1456s 1s/step - loss: 1.0242 - accuracy: 0.7271 - val_loss: 0.7867 - val_accuracy: 0.7958 Learning Rate = 0.004670715 Epoch 00010: val_loss improved from 0.82135 to 0.78667, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 11/100 1141/1141 [==============================] - 1457s 1s/step - loss: 0.9424 - accuracy: 0.7471 - val_loss: 0.7219 - val_accuracy: 0.8127 Learning Rate = 0.0044343933 Epoch 00011: val_loss improved from 0.78667 to 0.72189, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 12/100 1141/1141 [==============================] - 1460s 1s/step - loss: 0.8857 - accuracy: 0.7604 - val_loss: 0.7403 - val_accuracy: 0.8113 Learning Rate = 0.004220834 Epoch 00012: val_loss did not improve from 0.72189 Epoch 13/100 1141/1141 [==============================] - 1454s 1s/step - loss: 0.8467 - accuracy: 0.7670 - val_loss: 0.6583 - val_accuracy: 0.8303 Learning Rate = 0.0040268996 Epoch 00013: val_loss improved from 0.72189 to 0.65830, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 14/100 1141/1141 [==============================] - 1456s 1s/step - loss: 0.8039 - accuracy: 0.7809 - val_loss: 0.6530 - val_accuracy: 0.8316 Learning Rate = 0.003850004 Epoch 00014: val_loss improved from 0.65830 to 0.65301, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 15/100 1141/1141 [==============================] - 1455s 1s/step - loss: 0.7523 - 
accuracy: 0.7923 - val_loss: 0.6267 - val_accuracy: 0.8396 Learning Rate = 0.0036879955 Epoch 00015: val_loss improved from 0.65301 to 0.62673, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 16/100 1141/1141 [==============================] - 1456s 1s/step - loss: 0.7166 - accuracy: 0.8027 - val_loss: 0.5953 - val_accuracy: 0.8465 Learning Rate = 0.0035390714 Epoch 00016: val_loss improved from 0.62673 to 0.59534, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 17/100 1141/1141 [==============================] - 1460s 1s/step - loss: 0.6838 - accuracy: 0.8103 - val_loss: 0.5702 - val_accuracy: 0.8512 Learning Rate = 0.0034017074 Epoch 00017: val_loss improved from 0.59534 to 0.57020, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 18/100 1141/1141 [==============================] - 1455s 1s/step - loss: 0.6480 - accuracy: 0.8177 - val_loss: 0.5684 - val_accuracy: 0.8525 Learning Rate = 0.0032746089 Epoch 00018: val_loss improved from 0.57020 to 0.56841, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 19/100 1141/1141 [==============================] - 1456s 1s/step - loss: 0.6272 - accuracy: 0.8233 - val_loss: 0.5575 - val_accuracy: 0.8529 Learning Rate = 0.0031566655 Epoch 00019: val_loss improved from 0.56841 to 0.55753, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 20/100 1141/1141 [==============================] - 1455s 1s/step - loss: 0.6203 - accuracy: 0.8263 - val_loss: 0.5385 - val_accuracy: 0.8620 Learning Rate = 0.0030469226 Epoch 00020: val_loss improved from 0.55753 to 0.53846, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 21/100 1141/1141 [==============================] - 1457s 1s/step - loss: 0.5819 - accuracy: 0.8340 - val_loss: 0.5169 - val_accuracy: 0.8670 Learning Rate = 0.002944554 Epoch 00021: val_loss improved from 0.53846 to 0.51685, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 22/100 1141/1141 [==============================] - 1454s 1s/step - loss: 0.5630 - accuracy: 
0.8392 - val_loss: 0.5102 - val_accuracy: 0.8709 Learning Rate = 0.0028488406 Epoch 00022: val_loss improved from 0.51685 to 0.51017, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 23/100 1141/1141 [==============================] - 1533s 1s/step - loss: 0.5364 - accuracy: 0.8450 - val_loss: 0.5026 - val_accuracy: 0.8708 Learning Rate = 0.0027591535 Epoch 00023: val_loss improved from 0.51017 to 0.50261, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 24/100 1141/1141 [==============================] - 1554s 1s/step - loss: 0.5213 - accuracy: 0.8503 - val_loss: 0.4965 - val_accuracy: 0.8731 Learning Rate = 0.0026749412 Epoch 00024: val_loss improved from 0.50261 to 0.49650, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 25/100 1141/1141 [==============================] - 1525s 1s/step - loss: 0.5062 - accuracy: 0.8537 - val_loss: 0.4998 - val_accuracy: 0.8720 Learning Rate = 0.002595717 Epoch 00025: val_loss did not improve from 0.49650 Epoch 26/100 1141/1141 [==============================] - 1493s 1s/step - loss: 0.4972 - accuracy: 0.8571 - val_loss: 0.4815 - val_accuracy: 0.8776 Learning Rate = 0.0025210509 Epoch 00026: val_loss improved from 0.49650 to 0.48152, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 27/100 1141/1141 [==============================] - 1558s 1s/step - loss: 0.4695 - accuracy: 0.8633 - val_loss: 0.4849 - val_accuracy: 0.8778 Learning Rate = 0.00245056 Epoch 00027: val_loss did not improve from 0.48152 Epoch 28/100 1141/1141 [==============================] - 1559s 1s/step - loss: 0.4638 - accuracy: 0.8640 - val_loss: 0.4617 - val_accuracy: 0.8840 Learning Rate = 0.0023839038 Epoch 00028: val_loss improved from 0.48152 to 0.46169, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 29/100 1141/1141 [==============================] - 1902s 2s/step - loss: 0.4510 - accuracy: 0.8680 - val_loss: 0.4677 - val_accuracy: 0.8817 Learning Rate = 0.0023207779 Epoch 00029: val_loss did not improve from 
0.46169 Epoch 30/100 1141/1141 [==============================] - 1897s 2s/step - loss: 0.4288 - accuracy: 0.8743 - val_loss: 0.4505 - val_accuracy: 0.8852 Learning Rate = 0.002260909 Epoch 00030: val_loss improved from 0.46169 to 0.45051, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 31/100 1141/1141 [==============================] - 1521s 1s/step - loss: 0.4273 - accuracy: 0.8734 - val_loss: 0.4499 - val_accuracy: 0.8850 Learning Rate = 0.002204051 Epoch 00031: val_loss improved from 0.45051 to 0.44992, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 32/100 1141/1141 [==============================] - 1508s 1s/step - loss: 0.4142 - accuracy: 0.8778 - val_loss: 0.4393 - val_accuracy: 0.8874 Learning Rate = 0.0021499828 Epoch 00032: val_loss improved from 0.44992 to 0.43930, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 33/100 1141/1141 [==============================] - 1510s 1s/step - loss: 0.4001 - accuracy: 0.8813 - val_loss: 0.4488 - val_accuracy: 0.8841 Learning Rate = 0.0020985038 Epoch 00033: val_loss did not improve from 0.43930 Epoch 34/100 1141/1141 [==============================] - 1515s 1s/step - loss: 0.3918 - accuracy: 0.8831 - val_loss: 0.4237 - val_accuracy: 0.8922 Learning Rate = 0.0020494321 Epoch 00034: val_loss improved from 0.43930 to 0.42367, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 35/100 1141/1141 [==============================] - 1497s 1s/step - loss: 0.3839 - accuracy: 0.8857 - val_loss: 0.4327 - val_accuracy: 0.8900 Learning Rate = 0.0020026034 Epoch 00035: val_loss did not improve from 0.42367 Epoch 36/100 1141/1141 [==============================] - 1521s 1s/step - loss: 0.3698 - accuracy: 0.8900 - val_loss: 0.4388 - val_accuracy: 0.8902 Learning Rate = 0.0009789334 Epoch 00036: val_loss did not improve from 0.42367 Epoch 37/100 1141/1141 [==============================] - 1504s 1s/step - loss: 0.3313 - accuracy: 0.9005 - val_loss: 0.4006 - val_accuracy: 0.9026 Learning Rate = 
0.00095754256 Epoch 00037: val_loss improved from 0.42367 to 0.40063, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 38/100 1141/1141 [==============================] - 1551s 1s/step - loss: 0.3100 - accuracy: 0.9079 - val_loss: 0.3918 - val_accuracy: 0.9046 Learning Rate = 0.0009370666 Epoch 00038: val_loss improved from 0.40063 to 0.39179, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 39/100 1141/1141 [==============================] - 1524s 1s/step - loss: 0.3018 - accuracy: 0.9061 - val_loss: 0.3929 - val_accuracy: 0.9044 Learning Rate = 0.0009174481 Epoch 00039: val_loss did not improve from 0.39179 Epoch 40/100 1141/1141 [==============================] - 1533s 1s/step - loss: 0.2904 - accuracy: 0.9107 - val_loss: 0.3848 - val_accuracy: 0.9059 Learning Rate = 0.0008986341 Epoch 00040: val_loss improved from 0.39179 to 0.38482, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 41/100 1141/1141 [==============================] - 1533s 1s/step - loss: 0.2882 - accuracy: 0.9111 - val_loss: 0.3788 - val_accuracy: 0.9071 Learning Rate = 0.0008805762 Epoch 00041: val_loss improved from 0.38482 to 0.37876, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 42/100 1141/1141 [==============================] - 1507s 1s/step - loss: 0.2851 - accuracy: 0.9116 - val_loss: 0.3803 - val_accuracy: 0.9078 Learning Rate = 0.00086322986 Epoch 00042: val_loss did not improve from 0.37876 Epoch 43/100 1141/1141 [==============================] - 1531s 1s/step - loss: 0.2799 - accuracy: 0.9132 - val_loss: 0.3840 - val_accuracy: 0.9062 Learning Rate = 0.00042327683 Epoch 00043: val_loss did not improve from 0.37876 Epoch 44/100 1141/1141 [==============================] - 1538s 1s/step - loss: 0.2663 - accuracy: 0.9188 - val_loss: 0.3677 - val_accuracy: 0.9100 Learning Rate = 0.0004152548 Epoch 00044: val_loss improved from 0.37876 to 0.36769, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 45/100 1141/1141 [==============================] 
- 1550s 1s/step - loss: 0.2551 - accuracy: 0.9198 - val_loss: 0.3655 - val_accuracy: 0.9106 Learning Rate = 0.00040753116 Epoch 00045: val_loss improved from 0.36769 to 0.36549, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 46/100 1141/1141 [==============================] - 1503s 1s/step - loss: 0.2482 - accuracy: 0.9218 - val_loss: 0.3638 - val_accuracy: 0.9103 Learning Rate = 0.0004000896 Epoch 00046: val_loss improved from 0.36549 to 0.36379, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 47/100 1141/1141 [==============================] - 1446s 1s/step - loss: 0.2470 - accuracy: 0.9232 - val_loss: 0.3662 - val_accuracy: 0.9110 Learning Rate = 0.00039291495 Epoch 00047: val_loss did not improve from 0.36379 Epoch 48/100 1141/1141 [==============================] - 1443s 1s/step - loss: 0.2416 - accuracy: 0.9253 - val_loss: 0.3639 - val_accuracy: 0.9117 Learning Rate = 0.00019299654 Epoch 00048: val_loss did not improve from 0.36379 Epoch 49/100 1141/1141 [==============================] - 1447s 1s/step - loss: 0.2367 - accuracy: 0.9262 - val_loss: 0.3583 - val_accuracy: 0.9126 Learning Rate = 0.00018965543 Epoch 00049: val_loss improved from 0.36379 to 0.35827, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 50/100 1141/1141 [==============================] - 1525s 1s/step - loss: 0.2354 - accuracy: 0.9267 - val_loss: 0.3595 - val_accuracy: 0.9126 Learning Rate = 0.00018642803 Epoch 00050: val_loss did not improve from 0.35827 Epoch 51/100 1141/1141 [==============================] - 1457s 1s/step - loss: 0.2269 - accuracy: 0.9294 - val_loss: 0.3566 - val_accuracy: 0.9133 Learning Rate = 0.00018330864 Epoch 00051: val_loss improved from 0.35827 to 0.35662, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 52/100 1141/1141 [==============================] - 1516s 1s/step - loss: 0.2271 - accuracy: 0.9289 - val_loss: 0.3575 - val_accuracy: 0.9127 Learning Rate = 0.00018029193 Epoch 00052: val_loss did not improve from 0.35662 
Epoch 53/100 1141/1141 [==============================] - 1528s 1s/step - loss: 0.2312 - accuracy: 0.9276 - val_loss: 0.3560 - val_accuracy: 0.9139 Learning Rate = 0.00017737289 Epoch 00053: val_loss improved from 0.35662 to 0.35598, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 54/100 1141/1141 [==============================] - 1493s 1s/step - loss: 0.2289 - accuracy: 0.9275 - val_loss: 0.3566 - val_accuracy: 0.9128 Learning Rate = 0.00017454688 Epoch 00054: val_loss did not improve from 0.35598 Epoch 55/100 1141/1141 [==============================] - 1539s 1s/step - loss: 0.2242 - accuracy: 0.9290 - val_loss: 0.3579 - val_accuracy: 0.9139 Learning Rate = 8.590475e-05 Epoch 00055: val_loss did not improve from 0.35598 Epoch 56/100 1141/1141 [==============================] - 1508s 1s/step - loss: 0.2218 - accuracy: 0.9309 - val_loss: 0.3535 - val_accuracy: 0.9148 Learning Rate = 8.457833e-05 Epoch 00056: val_loss improved from 0.35598 to 0.35350, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 57/100 1141/1141 [==============================] - 1469s 1s/step - loss: 0.2237 - accuracy: 0.9299 - val_loss: 0.3532 - val_accuracy: 0.9147 Learning Rate = 8.3292245e-05 Epoch 00057: val_loss improved from 0.35350 to 0.35317, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 58/100 1141/1141 [==============================] - 1523s 1s/step - loss: 0.2166 - accuracy: 0.9305 - val_loss: 0.3527 - val_accuracy: 0.9148 Learning Rate = 8.204469e-05 Epoch 00058: val_loss improved from 0.35317 to 0.35266, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 59/100 1141/1141 [==============================] - 1520s 1s/step - loss: 0.2205 - accuracy: 0.9307 - val_loss: 0.3528 - val_accuracy: 0.9153 Learning Rate = 8.083395e-05 Epoch 00059: val_loss did not improve from 0.35266 Epoch 60/100 1141/1141 [==============================] - 1519s 1s/step - loss: 0.2194 - accuracy: 0.9311 - val_loss: 0.3540 - val_accuracy: 0.9149 Learning Rate = 
3.9829214e-05 Epoch 00060: val_loss did not improve from 0.35266 Epoch 61/100 1141/1141 [==============================] - 1547s 1s/step - loss: 0.2173 - accuracy: 0.9331 - val_loss: 0.3535 - val_accuracy: 0.9149 Learning Rate = 3.9258302e-05 Epoch 00061: val_loss did not improve from 0.35266 Epoch 62/100 1141/1141 [==============================] - 1445s 1s/step - loss: 0.2178 - accuracy: 0.9318 - val_loss: 0.3526 - val_accuracy: 0.9156 Learning Rate = 1.9351763e-05 Epoch 00062: val_loss improved from 0.35266 to 0.35260, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 63/100 1141/1141 [==============================] - 1444s 1s/step - loss: 0.2179 - accuracy: 0.9321 - val_loss: 0.3525 - val_accuracy: 0.9160 Learning Rate = 1.9082107e-05 Epoch 00063: val_loss improved from 0.35260 to 0.35254, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 64/100 1141/1141 [==============================] - 1444s 1s/step - loss: 0.2150 - accuracy: 0.9308 - val_loss: 0.3529 - val_accuracy: 0.9153 Learning Rate = 1.881986e-05 Epoch 00064: val_loss did not improve from 0.35254 Epoch 65/100 1141/1141 [==============================] - 1444s 1s/step - loss: 0.2140 - accuracy: 0.9325 - val_loss: 0.3527 - val_accuracy: 0.9157 Learning Rate = 9.282362e-06 Epoch 00065: val_loss did not improve from 0.35254 Epoch 66/100 1141/1141 [==============================] - 1448s 1s/step - loss: 0.2194 - accuracy: 0.9308 - val_loss: 0.3524 - val_accuracy: 0.9155 Learning Rate = 9.158207e-06 Epoch 00066: val_loss improved from 0.35254 to 0.35237, saving model to checkpoints\08_04_2021-23_23_22.h5 Epoch 67/100 1141/1141 [==============================] - 1442s 1s/step - loss: 0.2125 - accuracy: 0.9333 - val_loss: 0.3524 - val_accuracy: 0.9156 Learning Rate = 9.03733e-06 Epoch 00067: val_loss did not improve from 0.35237 Epoch 68/100 1141/1141 [==============================] - 1446s 1s/step - loss: 0.2139 - accuracy: 0.9334 - val_loss: 0.3525 - val_accuracy: 0.9154 Learning Rate = 
4.4598e-06 Epoch 00068: val_loss did not improve from 0.35237 Epoch 69/100 1141/1141 [==============================] - 1444s 1s/step - loss: 0.2131 - accuracy: 0.9333 - val_loss: 0.3525 - val_accuracy: 0.9154 Learning Rate = 4.40245e-06 Epoch 00069: val_loss did not improve from 0.35237 Epoch 70/100 1141/1141 [==============================] - 1443s 1s/step - loss: 0.2137 - accuracy: 0.9317 - val_loss: 0.3525 - val_accuracy: 0.9158 Learning Rate = 2.173278e-06 Epoch 00070: val_loss did not improve from 0.35237 Epoch 71/100 1141/1141 [==============================] - 1446s 1s/step - loss: 0.2158 - accuracy: 0.9326 - val_loss: 0.3524 - val_accuracy: 0.9157 Learning Rate = 2.1460316e-06 Epoch 00071: val_loss did not improve from 0.35237
# Visualize the loss/accuracy curves captured during training.
history_df = pd.DataFrame(history.history)
history_df.plot()
<AxesSubplot:>
# Final unbiased performance estimate on the held-out test set.
model.evaluate(test_generator)
31280/31280 [==============================] - 468s 15ms/step - loss: 0.3479 - accuracy: 0.9178
[0.34794852137565613, 0.9178388714790344]
columns = 4
rows = 4
fig = plt.figure(figsize=(40, 40))
# Map class index -> human-readable class name (the directory names).
class_descriptions = list(test_generator.class_indices.keys())
# Iterate in rows and columns (subplot indices are 1-based)
for i in range(1, columns * rows + 1):
    # Reserve a spot for the image
    fig.add_subplot(rows, columns, i)
    plt.axis('off')
    # Retrieve the next test image and its true label (batch_size=1)
    img, true_label = next(test_generator)
    # `true_label` is a length-1 array; extract the scalar explicitly —
    # calling int() on a size-1 ndarray is deprecated in NumPy.
    true_label = int(true_label[0])
    # Make the prediction using the image
    predicted_label = int(np.argmax(model.predict(img), axis=-1)[0])
    # Get a description for the prediction
    prediction_description = class_descriptions[predicted_label]
    # Get a description for the true value
    true_description = class_descriptions[true_label]
    # Set the title according to the prediction (plain int comparison now,
    # instead of the previous array-vs-scalar comparison)
    if true_label == predicted_label:
        plt.title("It is a {}".format(prediction_description), color='green')
    else:
        plt.title("Predicted: {} \n What really is: {}".format(prediction_description, true_description), color='red')
    # Plot the image
    plt.imshow(img[0])
# Show the plot
plt.show()
# Persist the index -> car-model-name mapping for later reuse.
models = pd.DataFrame({'car_model': class_descriptions})
models.to_csv('190_car_models.csv')
models
| car_model | |
|---|---|
| 0 | AUDI A3 |
| 1 | AUDI A4 |
| 2 | AUDI A5 |
| 3 | AUDI Q3 |
| 4 | BMW 320I |
| ... | ... |
| 185 | VW - VOLKSWAGEN T-CROSS |
| 186 | VW - VOLKSWAGEN TIGUAN |
| 187 | VW - VOLKSWAGEN UP! |
| 188 | VW - VOLKSWAGEN VIRTUS |
| 189 | VW - VOLKSWAGEN VOYAGE |
190 rows × 1 columns
# Restart iteration from the beginning of the generator.
test_generator.reset()
true_labels, predicted_labels = [], []
total_imgs = len(test_generator.filenames)
# batch_size=1, so one generator step yields exactly one (image, label) pair.
for _ in range(total_imgs):
    img, true_label = next(test_generator)
    # Store plain ints (not length-1 arrays) so sklearn receives 1-D label
    # vectors instead of an (N, 1) column that it has to ravel.
    true_labels.append(int(true_label[0]))
    predicted_labels.append(int(np.argmax(model.predict(img), axis=-1)[0]))
from sklearn.metrics import classification_report, confusion_matrix
# Per-class precision/recall/F1 keyed by the readable class names.
print(classification_report(true_labels, predicted_labels, target_names=class_descriptions))
precision recall f1-score support
AUDI A3 0.90 0.86 0.88 240
AUDI A4 0.76 0.81 0.78 108
AUDI A5 0.77 0.85 0.81 80
AUDI Q3 0.97 0.96 0.97 161
BMW 320I 0.87 0.84 0.85 231
BMW 328I 0.51 0.63 0.56 52
BMW X1 0.96 0.93 0.95 230
BMW X3 0.87 0.92 0.89 64
BMW X5 0.76 0.92 0.83 51
BMW X6 0.94 0.83 0.88 53
CHERY QQ 0.98 0.98 0.98 43
CHERY TIGGO 2 0.98 0.98 0.98 44
CITROEN AIRCROSS 0.97 0.98 0.97 215
CITROEN C3 0.96 0.96 0.96 212
CITROEN C4 0.96 0.91 0.93 250
CITROEN C4 CACTUS 0.95 0.98 0.96 108
CITROEN XSARA 0.92 0.95 0.93 143
DODGE JOURNEY 0.77 0.82 0.79 66
DODGE RAM 0.89 0.89 0.89 44
FIAT 500 0.96 0.98 0.97 136
FIAT ARGO 0.97 0.99 0.98 236
FIAT BRAVO 0.92 0.95 0.93 147
FIAT CRONOS 0.95 0.97 0.96 168
FIAT DOBLO 0.99 1.00 0.99 205
FIAT DUCATO 0.96 0.93 0.94 137
FIAT FIORINO 0.96 0.97 0.96 244
FIAT FREEMONT 0.89 0.86 0.87 118
FIAT GRAND SIENA 0.92 0.97 0.95 241
FIAT IDEA 0.96 0.96 0.96 247
FIAT LINEA 0.97 0.98 0.98 182
FIAT MOBI 0.98 0.99 0.98 206
FIAT PALIO 0.85 0.79 0.82 227
FIAT PUNTO 0.97 0.97 0.97 237
FIAT SIENA 0.82 0.83 0.82 227
FIAT STILO 0.95 0.96 0.95 137
FIAT STRADA 0.92 0.94 0.93 235
FIAT TORO 0.98 0.98 0.98 248
FIAT UNO 0.93 0.94 0.93 238
FORD ECOSPORT 0.98 0.94 0.96 216
FORD EDGE 0.98 0.92 0.95 62
FORD ESCORT 0.92 0.86 0.89 161
FORD F-1000 0.88 0.96 0.92 52
FORD F-250 0.93 0.88 0.90 75
FORD FIESTA 0.95 0.89 0.92 218
FORD FOCUS 0.95 0.94 0.95 214
FORD FUSION 0.95 0.93 0.94 208
FORD KA 0.91 0.86 0.88 238
FORD KA+ 0.88 0.92 0.90 223
FORD RANGER 0.92 0.91 0.92 247
GM - CHEVROLET AGILE 0.97 0.97 0.97 244
GM - CHEVROLET ASTRA 0.93 0.90 0.91 239
GM - CHEVROLET BLAZER 0.80 0.84 0.82 70
GM - CHEVROLET CAMARO 0.94 0.95 0.94 63
GM - CHEVROLET CAPTIVA 0.97 0.98 0.98 199
GM - CHEVROLET CARAVAN 0.65 0.71 0.68 52
GM - CHEVROLET CELTA 0.86 0.91 0.88 219
GM - CHEVROLET CHEVETTE 0.76 0.90 0.83 87
GM - CHEVROLET CLASSIC 0.88 0.86 0.87 218
GM - CHEVROLET COBALT 0.96 0.96 0.96 236
GM - CHEVROLET CORSA 0.78 0.78 0.78 215
GM - CHEVROLET CRUZE 0.90 0.92 0.91 217
GM - CHEVROLET D-20 0.88 0.86 0.87 51
GM - CHEVROLET JOY 0.84 0.83 0.84 230
GM - CHEVROLET KADETT 0.95 0.89 0.92 98
GM - CHEVROLET MERIVA 0.97 0.98 0.97 217
GM - CHEVROLET MONTANA 0.97 0.96 0.96 212
GM - CHEVROLET MONZA 0.81 0.90 0.85 133
GM - CHEVROLET OMEGA 0.88 0.91 0.89 75
GM - CHEVROLET ONIX 0.75 0.69 0.71 232
GM - CHEVROLET OPALA 0.83 0.79 0.81 142
GM - CHEVROLET PRISMA 0.78 0.79 0.79 231
GM - CHEVROLET S10 0.91 0.87 0.89 239
GM - CHEVROLET SONIC 0.95 0.96 0.95 54
GM - CHEVROLET SPIN 0.99 0.97 0.98 218
GM - CHEVROLET TRACKER 0.97 0.97 0.97 205
GM - CHEVROLET TRAILBLAZER 0.90 0.90 0.90 94
GM - CHEVROLET VECTRA 0.93 0.88 0.91 211
GM - CHEVROLET ZAFIRA 0.95 0.98 0.97 186
HONDA CITY 0.93 0.93 0.93 229
HONDA CIVIC 0.92 0.91 0.92 241
HONDA CR-V 0.99 0.98 0.98 220
HONDA FIT 0.96 0.95 0.95 231
HONDA HR-V 0.96 0.99 0.98 222
HONDA WR-V 0.99 0.98 0.98 100
HYUNDAI AZERA 0.89 0.97 0.93 97
HYUNDAI CRETA 0.97 1.00 0.98 217
HYUNDAI ELANTRA 0.93 0.95 0.94 98
HYUNDAI HB20 0.81 0.68 0.74 223
HYUNDAI HB20S 0.76 0.88 0.82 244
HYUNDAI HB20X 0.94 0.92 0.93 154
HYUNDAI HR 0.91 0.97 0.94 62
HYUNDAI I30 0.97 0.93 0.95 245
HYUNDAI IX35 0.99 0.97 0.98 226
HYUNDAI SANTA FE 0.96 0.95 0.96 188
HYUNDAI SONATA 0.90 0.98 0.93 44
HYUNDAI TUCSON 0.95 0.98 0.96 212
HYUNDAI VELOSTER 0.90 0.94 0.92 47
JAC J3 0.96 0.96 0.96 71
JEEP CHEROKEE 0.86 0.80 0.83 45
JEEP COMPASS 0.98 0.99 0.99 222
JEEP GRAND CHEROKEE 0.90 0.89 0.90 74
JEEP RENEGADE 0.99 1.00 1.00 214
KIA MOTORS CERATO 0.88 0.91 0.89 163
KIA MOTORS PICANTO 0.97 0.97 0.97 145
KIA MOTORS SORENTO 0.98 0.95 0.96 148
KIA MOTORS SOUL 0.98 0.99 0.99 104
KIA MOTORS SPORTAGE 0.96 0.95 0.95 221
LAND ROVER DISCOVERY 0.95 0.91 0.93 115
LAND ROVER DISCOVERY4 0.93 0.98 0.96 98
LAND ROVER FREELANDER2 0.97 0.93 0.95 82
LAND ROVER RANGE ROVER 0.95 0.95 0.95 244
LIFAN X60 0.96 0.98 0.97 50
MERCEDES-BENZ A-200 0.82 0.85 0.84 54
MERCEDES-BENZ C-180 0.74 0.67 0.70 194
MERCEDES-BENZ C-200 0.48 0.58 0.52 76
MERCEDES-BENZ C-250 0.62 0.80 0.70 46
MERCEDES-BENZ CLASSE A 0.79 0.79 0.79 68
MERCEDES-BENZ GLA 0.96 0.98 0.97 94
MERCEDES-BENZ SPRINTER 0.97 0.95 0.96 195
MINI COOPER 0.97 1.00 0.98 149
MITSUBISHI ASX 0.98 0.96 0.97 222
MITSUBISHI ECLIPSE CROSS 0.95 0.93 0.94 44
MITSUBISHI L200 0.86 0.94 0.90 216
MITSUBISHI LANCER 0.98 0.93 0.95 98
MITSUBISHI OUTLANDER 0.94 0.94 0.94 180
MITSUBISHI PAJERO 0.87 0.82 0.85 225
MITSUBISHI PAJERO SPORT 0.57 0.70 0.63 46
NISSAN FRONTIER 0.95 0.90 0.93 242
NISSAN KICKS 0.99 0.99 0.99 208
NISSAN LIVINA 0.98 0.99 0.98 183
NISSAN MARCH 0.97 0.99 0.98 234
NISSAN SENTRA 0.94 0.93 0.94 206
NISSAN TIIDA 0.99 0.98 0.99 117
NISSAN VERSA 0.97 0.96 0.96 237
PEUGEOT 2008 0.99 0.98 0.98 222
PEUGEOT 206 0.95 0.90 0.93 244
PEUGEOT 207 0.93 0.94 0.94 221
PEUGEOT 208 0.97 0.95 0.96 214
PEUGEOT 3008 0.97 0.96 0.96 93
PEUGEOT 307 0.93 0.95 0.94 195
PEUGEOT 308 0.96 0.96 0.96 201
PEUGEOT 408 0.95 0.96 0.95 156
PORSCHE CAYENNE 0.85 0.98 0.91 52
RENAULT CAPTUR 0.99 1.00 0.99 204
RENAULT CLIO 0.93 0.93 0.93 227
RENAULT DUSTER 0.97 0.97 0.97 200
RENAULT FLUENCE 0.96 0.97 0.96 180
RENAULT KWID 1.00 0.96 0.98 249
RENAULT LOGAN 0.90 0.90 0.90 254
RENAULT MASTER 0.95 0.97 0.96 201
RENAULT MEGANE 0.97 0.95 0.96 235
RENAULT SANDERO 0.81 0.75 0.78 234
RENAULT SCENIC 0.96 0.96 0.96 113
RENAULT STEPWAY 0.55 0.64 0.59 53
RENAULT SYMBOL 0.92 0.97 0.94 60
SUZUKI GRAND VITARA 0.94 0.97 0.96 88
SUZUKI JIMNY 0.99 1.00 0.99 74
SUZUKI VITARA 0.92 0.89 0.90 37
TOYOTA COROLLA 0.90 0.92 0.91 206
TOYOTA ETIOS 0.97 0.98 0.98 220
TOYOTA FIELDER 0.79 0.91 0.85 46
TOYOTA HILUX 0.97 0.93 0.95 215
TOYOTA RAV4 0.94 0.92 0.93 96
TOYOTA YARIS 0.99 0.97 0.98 205
TROLLER T-4 0.98 0.99 0.99 120
VOLVO XC 0.66 0.71 0.69 77
VOLVO XC-60 0.68 0.64 0.66 72
VW - VOLKSWAGEN AMAROK 0.97 0.97 0.97 212
VW - VOLKSWAGEN BORA 0.94 0.94 0.94 52
VW - VOLKSWAGEN BRASILIA 0.87 0.85 0.86 46
VW - VOLKSWAGEN CROSSFOX 0.97 0.94 0.95 213
VW - VOLKSWAGEN FOX 0.86 0.82 0.84 228
VW - VOLKSWAGEN FUSCA 0.96 0.96 0.96 223
VW - VOLKSWAGEN GOL 0.64 0.63 0.64 202
VW - VOLKSWAGEN GOLF 0.92 0.91 0.91 243
VW - VOLKSWAGEN JETTA 0.88 0.91 0.90 204
VW - VOLKSWAGEN KOMBI 0.98 0.97 0.97 221
VW - VOLKSWAGEN NEW 0.96 0.98 0.97 53
VW - VOLKSWAGEN NIVUS 0.97 0.98 0.97 136
VW - VOLKSWAGEN PARATI 0.78 0.82 0.80 251
VW - VOLKSWAGEN PASSAT 0.85 0.86 0.86 155
VW - VOLKSWAGEN POLO 0.84 0.77 0.81 238
VW - VOLKSWAGEN SANTANA 0.85 0.86 0.86 148
VW - VOLKSWAGEN SAVEIRO 0.91 0.91 0.91 223
VW - VOLKSWAGEN SPACEFOX 0.86 0.88 0.87 210
VW - VOLKSWAGEN T-CROSS 0.98 0.99 0.98 225
VW - VOLKSWAGEN TIGUAN 0.94 0.96 0.95 213
VW - VOLKSWAGEN UP! 0.98 0.99 0.99 234
VW - VOLKSWAGEN VIRTUS 0.85 0.88 0.87 227
VW - VOLKSWAGEN VOYAGE 0.81 0.82 0.82 217
accuracy 0.92 31280
macro avg 0.91 0.91 0.91 31280
weighted avg 0.92 0.92 0.92 31280
# Save the fine-tuned model in HDF5 format for later inference.
# NOTE(review): the filename looks like it has typos — "1901types" vs the 190
# classes used throughout, and "percert" vs "percent". Kept as-is so any
# existing references to this exact path still resolve; confirm before renaming.
model.save('models/car_classifier_resnet50v2_1901types_91percert.h5')